初期設定¶
In [1]:
"""環境"""
# .lab
"""更新履歴"""
# 最終更新日:2025/08/04
# 2025/08/04 cachedirの洗浄をオプションにしました。
# 2025/07/22 pickleを保存するために、pickleとdatetimeをインポートするようにしました。
# 2025/07/17 inputsを保存するjsonを作ることにしました
# 2025/07/17 ctypesを使ってtkinterの解像度を上げることにしました
# 2025/05/21 shutilを使って、cachedirを洗浄することにしました
# 2025/05/15 simple_progress_barおよびdata(dict)を使い始めました。
# 2025/02/25 os.getcwd()を採用し、__file__を定義する必要をなくしました。
"""モジュール読み込み"""
# ファイル操作等
import sys
import os
from datetime import datetime
from pprint import pprint
import logging
import pickle
# import struct
from tqdm import tqdm
import h5py
# import threading
import json
import shutil
# tkinter
from tkinter import filedialog, messagebox, Tk
import ctypes
ctypes.windll.shcore.SetProcessDpiAwareness(1) # tkinterの解像度を上げる
# データ分析ツール
import pandas as pd
import numpy as np
import scipy as sp
# import math
# from sklearn.linear_model import LinearRegression
# グラフ等作成用
import matplotlib
import matplotlib.pyplot as plt # 図の作成用
from PIL import Image as im
# import cv2
from IPython.display import display, HTML, clear_output, update_display, Image
# 自作モジュール
sys.path.append(r"C:\Users\okaza\pythonenv")
from modules.Mytools.Tools import print_fileinfo, h5_tree, dict_tree, simple_progress_bar, clean_cache_except_logfiles, get_total_size
import modules.Mytools.Settings
import modules.fitXRD as fx
from modules.peakfit import peakfit, pseudoVoigt
"""ログ管理ツール作成"""
# chche directoryの設定
cachedir = os.path.abspath(os.getcwd() + "/.cache")
if False:
clean_cache_except_logfiles(cachedir)
os.makedirs(cachedir, exist_ok=True)
# loggerの作成
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
format = "%(levelname)-9s %(asctime)s [%(filename)s:%(lineno)d] %(message)s"
# Streamハンドラクラスを作成
sh = logging.StreamHandler()
sh.setLevel(logging.DEBUG)
sh.setFormatter(logging.Formatter(format))
logger.addHandler(sh)
# Fileハンドラクラスをインスタンス化
logfile = cachedir + ""
fh = logging.FileHandler(filename=cachedir + "/notebook.log", encoding="utf-8")
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter(format))
logger.addHandler(fh)
logger.debug("[Activate workspace]: " + os.getcwd())
logger.debug("[Set cache]: "+ os.path.abspath(cachedir))
logger.debug("[Activate log]: " + os.path.abspath(cachedir + "/notebook.log"))
# globalなデータを作成
data = dict()
logger.debug("[Create global variable]: data")
# inputをjsonファイルに保存する
data["inputs"] = os.path.join(cachedir, "input.json")
with open(data["inputs"], mode = "w") as f:
json.dump(dict(), f)
logger.info("[Set json for save inputs]: " + os.path.abspath(data["inputs"]) )
DEBUG 2025-08-04 14:09:40,352 [1617289364.py:79] [Activate workspace]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto DEBUG 2025-08-04 14:09:40,355 [1617289364.py:80] [Set cache]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache DEBUG 2025-08-04 14:09:40,357 [1617289364.py:81] [Activate log]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\notebook.log DEBUG 2025-08-04 14:09:40,359 [1617289364.py:85] [Create global variable]: data INFO 2025-08-04 14:09:40,364 [1617289364.py:91] [Set json for save inputs]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\input.json
生データの保存
In [ ]:
def save_pickle():
"""変更履歴
* 2025/08/05: defaultextensitionを追加、initialdirの削除
* 2025/08/01: 作成ののち変更
"""
# 現在時刻の取得
dt = datetime.now()
initfilename = dt.strftime('%Y%m%d%H%M%S%f') + ".pkl"
# tkinterでファイルダイアログを開く
window = Tk()
window.wm_attributes("-topmost", 1)
window.withdraw()
filename = filedialog.asksaveasfilename(
parent = window,
filetypes = [
("pkl", "*.pkl"),
],
initialfile=initfilename,
defaultextension=".pkl",
)
# filenameがセットされたら保存する
if not (filename == ""):
with open(filename, mode = "wb") as f:
pickle.dump(
obj = data,
file = f
)
logger.info("[Save data]: " + os.path.abspath(filename))
return
save_pickle()
del save_pickle
INFO 2025-08-05 10:53:48,079 [2013597588.py:32] [Save data]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto
生データの読み込み
In [5]:
def load_pickle():
"""変更履歴
* 2025/08/04: dataの上書きができていませんでした
* 2025/07/25: 作成
"""
# tkinterでファイルダイアログを開く
window = Tk()
window.wm_attributes("-topmost", 1)
window.withdraw()
filename = filedialog.askopenfilename(
parent = window,
filetypes = [
("pkl", "*.pkl"),
],
)
global data
if not (filename == ""):
with open(filename, mode = "rb") as f:
data = pickle.load(
file = f
)
logger.info("[Load]: " + os.path.abspath(filename))
dict_tree(data)
return
load_pickle()
del load_pickle
INFO 2025-08-04 14:11:38,452 [1452740726.py:24] [Load]: C:\Users\okaza\pythonenv\fpd\fit_xrd_auto\20250801182000584489.pkl
├── inputs <class 'str'>
├── set_filelist
│ ├── dir <class 'str'>
│ ├── flist <class 'list'>
│ ├── header <class 'str'>
│ └── footer <class 'str'>
├── convertCSV2HDF
│ └── hdf <class 'str'>
├── set_fitlim
│ ├── peakname <class 'str'>
│ ├── theta_range <class 'tuple'>
│ ├── aset <class 'float'>
│ ├── size_inches <class 'tuple'>
│ ├── fit_range <class 'tuple'>
│ └── ylim <class 'tuple'>
├── autofit
│ └── res <class 'list'>
└── export_html
└── ipynb <class 'str'>
目的¶
csvファイルを読み込んでフィッティングを行います。
1. ファイルの読み込み¶
まずファイルリストを作成します。
In [ ]:
def set_filelist():
# ディレクトリ名を指定
dir = r"C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1"
# ヘッダーとフッターを指定
header = "UODE15_5_"
footer = ".csv"
if True: # Main
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# inputの保存
json_input = data["inputs"]
with open(json_input, mode = "r") as f:
inputs = json.load(f)
key = sys._getframe().f_code.co_name
inputs[key] = dict()
inputs[key]["dir"] = dir
inputs[key]["header"] = header
inputs[key]["footer"] = footer
with open(json_input, mode = "w") as f:
json.dump(inputs, f, indent = 4)
# ヘッダーとフッターを含むファイル名を取得
flist = list()
for __ in os.listdir(dir):
if not header in __:
continue
if not footer in __:
continue
flist.append(__)
logger.debug("Read filenames")
# ソート
flist.sort(key = (lambda x: int(x.replace(header, "").replace(footer, ""))))
logger.debug("Sort the file name list")
# 表示
flist = flist[:442]
for f in flist:
print(os.path.abspath(dir + "/" + f))
# 格納
key = sys._getframe().f_code.co_name
data[key] = dict()
data[key]["dir"] = dir
data[key]["flist"] = flist
data[key]["header"] = header
data[key]["footer"] = footer
logger.info("[Add variables]: " + key)
dict_tree(data)
return
set_filelist()
del set_filelist
DEBUG 2025-08-01 13:21:07,658 [2563800268.py:32] Read filenames DEBUG 2025-08-01 13:21:07,659 [2563800268.py:36] Sort the file name list INFO 2025-08-01 13:21:07,660 [2563800268.py:50] [Add variables]: set_filelist
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_0.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_1.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_2.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_3.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_4.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_5.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_6.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_7.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_8.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_9.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_10.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_11.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_12.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_13.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_14.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_15.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_16.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_17.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_18.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_19.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_20.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_21.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_22.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_23.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_24.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_25.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_26.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_27.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_28.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_29.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_30.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_31.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_32.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_33.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_34.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_35.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_36.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_37.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_38.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_39.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_40.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_41.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_42.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_43.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_44.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_45.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_46.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_47.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_48.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_49.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_50.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_51.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_52.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_53.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_54.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_55.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_56.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_57.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_58.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_59.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_60.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_61.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_62.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_63.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_64.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_65.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_66.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_67.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_68.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_69.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_70.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_71.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_72.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_73.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_74.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_75.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_76.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_77.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_78.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_79.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_80.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_81.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_82.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_83.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_84.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_85.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_86.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_87.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_88.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_89.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_90.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_91.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_92.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_93.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_94.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_95.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_96.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_97.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_98.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_99.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_100.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_101.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_102.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_103.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_104.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_105.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_106.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_107.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_108.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_109.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_110.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_111.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_112.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_113.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_114.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_115.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_116.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_117.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_118.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_119.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_120.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_121.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_122.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_123.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_124.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_125.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_126.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_127.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_128.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_129.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_130.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_131.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_132.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_133.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_134.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_135.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_136.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_137.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_138.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_139.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_140.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_141.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_142.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_143.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_144.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_145.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_146.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_147.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_148.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_149.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_150.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_151.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_152.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_153.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_154.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_155.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_156.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_157.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_158.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_159.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_160.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_161.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_162.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_163.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_164.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_165.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_166.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_167.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_168.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_169.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_170.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_171.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_172.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_173.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_174.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_175.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_176.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_177.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_178.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_179.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_180.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_181.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_182.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_183.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_184.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_185.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_186.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_187.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_188.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_189.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_190.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_191.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_192.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_193.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_194.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_195.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_196.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_197.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_198.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_199.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_200.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_201.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_202.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_203.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_204.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_205.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_206.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_207.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_208.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_209.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_210.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_211.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_212.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_213.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_214.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_215.csv
C:\Users\okaza\Box\DataStrorage\2025_06_BL10XU\Okazaki\UODE15\FPD_1\UODE15_5_216.csv
├── inputs <class 'str'>
└── set_filelist
├── dir <class 'str'>
├── flist <class 'list'>
├── header <class 'str'>
└── footer <class 'str'>
データを読み込んで、hdfファイルに保存します。
In [7]:
import threading
import concurrent.futures as confu
In [8]:
def convertCSV2HDF():
"""更新履歴
* 2025/06/25: データの取得方法を変更
"""
# hdfファイルを読み込む場合はここを指定する
# hdffilename = r""
if not ("hdffilename" in locals()):
# 変数読み込み
dir = data["set_filelist"]["dir"]
flist = data["set_filelist"]["flist"]
n_frame = len(flist)
# hdfファイル初期化
key = sys._getframe().f_code.co_name
hdffilename = cachedir + "/" + key + ".hdf"
theta = pd.read_csv(dir + "/" + flist[0], header = None).values.T[0]
with h5py.File(hdffilename, mode = "w") as f:
f.create_dataset(
name = "2theta",
data = theta,
shape = theta.shape,
dtype = theta.dtype
)
g = f.create_group(
name = "intensity"
)
for i in tqdm(range(n_frame)):
g.create_dataset(
name = "frame = {}".format(i),
shape = theta.shape,
dtype = theta.dtype
)
# lock
lock = threading.Lock()
# 演算
with confu.ThreadPoolExecutor(max_workers=os.cpu_count()) as tpe:
# 演算開始
futures = [
tpe.submit(
lambda filename: (
int(os.path.splitext(filename)[0].replace(data["set_filelist"]["header"], "").replace(data["set_filelist"]["footer"], "")),
pd.read_csv(dir + "/" + filename, header = None).values.T[1]),
filename
) for filename in flist
]
# 終わったプロセスから順に出力
for i, future in enumerate(confu.as_completed(futures)):
intensity = future.result()
with lock:
j = intensity[0]
with h5py.File(hdffilename, mode = "r+") as f:
f["intensity"]["frame = {}".format(j)][:] = intensity[1] # type: ignore
simple_progress_bar(i+1, n_frame)
logger.info("[Save hdf]: " + os.path.abspath(hdffilename))
# 表示
data[key] = dict()
data[key]["hdf"] = hdffilename
logger.info("[Add variables]: " + key)
with h5py.File(hdffilename, mode = "r") as f:
h5_tree(f)
return
convertCSV2HDF()
del convertCSV2HDF
100%|██████████| 217/217 [00:00<00:00, 20571.47it/s]
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■-] 99% (214/217)
INFO 2025-08-04 14:11:53,891 [2896411365.py:62] [Save hdf]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\convertCSV2HDF.hdf INFO 2025-08-04 14:11:53,893 [2896411365.py:67] [Add variables]: convertCSV2HDF
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■] 100% (217/217)
<HDF5 file "convertCSV2HDF.hdf" (mode r)>
├── 2theta ((3800,), float64)
└── intensity
├── frame = 0 ((3800,), float64)
├── frame = 1 ((3800,), float64)
├── frame = 10 ((3800,), float64)
├── frame = 100 ((3800,), float64)
├── frame = 101 ((3800,), float64)
├── frame = 102 ((3800,), float64)
├── frame = 103 ((3800,), float64)
├── frame = 104 ((3800,), float64)
├── frame = 105 ((3800,), float64)
├── frame = 106 ((3800,), float64)
├── frame = 107 ((3800,), float64)
├── frame = 108 ((3800,), float64)
├── frame = 109 ((3800,), float64)
├── frame = 11 ((3800,), float64)
├── frame = 110 ((3800,), float64)
├── frame = 111 ((3800,), float64)
├── frame = 112 ((3800,), float64)
├── frame = 113 ((3800,), float64)
├── frame = 114 ((3800,), float64)
├── frame = 115 ((3800,), float64)
├── frame = 116 ((3800,), float64)
├── frame = 117 ((3800,), float64)
├── frame = 118 ((3800,), float64)
├── frame = 119 ((3800,), float64)
├── frame = 12 ((3800,), float64)
├── frame = 120 ((3800,), float64)
├── frame = 121 ((3800,), float64)
├── frame = 122 ((3800,), float64)
├── frame = 123 ((3800,), float64)
├── frame = 124 ((3800,), float64)
├── frame = 125 ((3800,), float64)
├── frame = 126 ((3800,), float64)
├── frame = 127 ((3800,), float64)
├── frame = 128 ((3800,), float64)
├── frame = 129 ((3800,), float64)
├── frame = 13 ((3800,), float64)
├── frame = 130 ((3800,), float64)
├── frame = 131 ((3800,), float64)
├── frame = 132 ((3800,), float64)
├── frame = 133 ((3800,), float64)
├── frame = 134 ((3800,), float64)
├── frame = 135 ((3800,), float64)
├── frame = 136 ((3800,), float64)
├── frame = 137 ((3800,), float64)
├── frame = 138 ((3800,), float64)
├── frame = 139 ((3800,), float64)
├── frame = 14 ((3800,), float64)
├── frame = 140 ((3800,), float64)
├── frame = 141 ((3800,), float64)
├── frame = 142 ((3800,), float64)
├── frame = 143 ((3800,), float64)
├── frame = 144 ((3800,), float64)
├── frame = 145 ((3800,), float64)
├── frame = 146 ((3800,), float64)
├── frame = 147 ((3800,), float64)
├── frame = 148 ((3800,), float64)
├── frame = 149 ((3800,), float64)
├── frame = 15 ((3800,), float64)
├── frame = 150 ((3800,), float64)
├── frame = 151 ((3800,), float64)
├── frame = 152 ((3800,), float64)
├── frame = 153 ((3800,), float64)
├── frame = 154 ((3800,), float64)
├── frame = 155 ((3800,), float64)
├── frame = 156 ((3800,), float64)
├── frame = 157 ((3800,), float64)
├── frame = 158 ((3800,), float64)
├── frame = 159 ((3800,), float64)
├── frame = 16 ((3800,), float64)
├── frame = 160 ((3800,), float64)
├── frame = 161 ((3800,), float64)
├── frame = 162 ((3800,), float64)
├── frame = 163 ((3800,), float64)
├── frame = 164 ((3800,), float64)
├── frame = 165 ((3800,), float64)
├── frame = 166 ((3800,), float64)
├── frame = 167 ((3800,), float64)
├── frame = 168 ((3800,), float64)
├── frame = 169 ((3800,), float64)
├── frame = 17 ((3800,), float64)
├── frame = 170 ((3800,), float64)
├── frame = 171 ((3800,), float64)
├── frame = 172 ((3800,), float64)
├── frame = 173 ((3800,), float64)
├── frame = 174 ((3800,), float64)
├── frame = 175 ((3800,), float64)
├── frame = 176 ((3800,), float64)
├── frame = 177 ((3800,), float64)
├── frame = 178 ((3800,), float64)
├── frame = 179 ((3800,), float64)
├── frame = 18 ((3800,), float64)
├── frame = 180 ((3800,), float64)
├── frame = 181 ((3800,), float64)
├── frame = 182 ((3800,), float64)
├── frame = 183 ((3800,), float64)
├── frame = 184 ((3800,), float64)
├── frame = 185 ((3800,), float64)
├── frame = 186 ((3800,), float64)
├── frame = 187 ((3800,), float64)
├── frame = 188 ((3800,), float64)
├── frame = 189 ((3800,), float64)
├── frame = 19 ((3800,), float64)
├── frame = 190 ((3800,), float64)
├── frame = 191 ((3800,), float64)
├── frame = 192 ((3800,), float64)
├── frame = 193 ((3800,), float64)
├── frame = 194 ((3800,), float64)
├── frame = 195 ((3800,), float64)
├── frame = 196 ((3800,), float64)
├── frame = 197 ((3800,), float64)
├── frame = 198 ((3800,), float64)
├── frame = 199 ((3800,), float64)
├── frame = 2 ((3800,), float64)
├── frame = 20 ((3800,), float64)
├── frame = 200 ((3800,), float64)
├── frame = 201 ((3800,), float64)
├── frame = 202 ((3800,), float64)
├── frame = 203 ((3800,), float64)
├── frame = 204 ((3800,), float64)
├── frame = 205 ((3800,), float64)
├── frame = 206 ((3800,), float64)
├── frame = 207 ((3800,), float64)
├── frame = 208 ((3800,), float64)
├── frame = 209 ((3800,), float64)
├── frame = 21 ((3800,), float64)
├── frame = 210 ((3800,), float64)
├── frame = 211 ((3800,), float64)
├── frame = 212 ((3800,), float64)
├── frame = 213 ((3800,), float64)
├── frame = 214 ((3800,), float64)
├── frame = 215 ((3800,), float64)
├── frame = 216 ((3800,), float64)
├── frame = 22 ((3800,), float64)
├── frame = 23 ((3800,), float64)
├── frame = 24 ((3800,), float64)
├── frame = 25 ((3800,), float64)
├── frame = 26 ((3800,), float64)
├── frame = 27 ((3800,), float64)
├── frame = 28 ((3800,), float64)
├── frame = 29 ((3800,), float64)
├── frame = 3 ((3800,), float64)
├── frame = 30 ((3800,), float64)
├── frame = 31 ((3800,), float64)
├── frame = 32 ((3800,), float64)
├── frame = 33 ((3800,), float64)
├── frame = 34 ((3800,), float64)
├── frame = 35 ((3800,), float64)
├── frame = 36 ((3800,), float64)
├── frame = 37 ((3800,), float64)
├── frame = 38 ((3800,), float64)
├── frame = 39 ((3800,), float64)
├── frame = 4 ((3800,), float64)
├── frame = 40 ((3800,), float64)
├── frame = 41 ((3800,), float64)
├── frame = 42 ((3800,), float64)
├── frame = 43 ((3800,), float64)
├── frame = 44 ((3800,), float64)
├── frame = 45 ((3800,), float64)
├── frame = 46 ((3800,), float64)
├── frame = 47 ((3800,), float64)
├── frame = 48 ((3800,), float64)
├── frame = 49 ((3800,), float64)
├── frame = 5 ((3800,), float64)
├── frame = 50 ((3800,), float64)
├── frame = 51 ((3800,), float64)
├── frame = 52 ((3800,), float64)
├── frame = 53 ((3800,), float64)
├── frame = 54 ((3800,), float64)
├── frame = 55 ((3800,), float64)
├── frame = 56 ((3800,), float64)
├── frame = 57 ((3800,), float64)
├── frame = 58 ((3800,), float64)
├── frame = 59 ((3800,), float64)
├── frame = 6 ((3800,), float64)
├── frame = 60 ((3800,), float64)
├── frame = 61 ((3800,), float64)
├── frame = 62 ((3800,), float64)
├── frame = 63 ((3800,), float64)
├── frame = 64 ((3800,), float64)
├── frame = 65 ((3800,), float64)
├── frame = 66 ((3800,), float64)
├── frame = 67 ((3800,), float64)
├── frame = 68 ((3800,), float64)
├── frame = 69 ((3800,), float64)
├── frame = 7 ((3800,), float64)
├── frame = 70 ((3800,), float64)
├── frame = 71 ((3800,), float64)
├── frame = 72 ((3800,), float64)
├── frame = 73 ((3800,), float64)
├── frame = 74 ((3800,), float64)
├── frame = 75 ((3800,), float64)
├── frame = 76 ((3800,), float64)
├── frame = 77 ((3800,), float64)
├── frame = 78 ((3800,), float64)
├── frame = 79 ((3800,), float64)
├── frame = 8 ((3800,), float64)
├── frame = 80 ((3800,), float64)
├── frame = 81 ((3800,), float64)
├── frame = 82 ((3800,), float64)
├── frame = 83 ((3800,), float64)
├── frame = 84 ((3800,), float64)
├── frame = 85 ((3800,), float64)
├── frame = 86 ((3800,), float64)
├── frame = 87 ((3800,), float64)
├── frame = 88 ((3800,), float64)
├── frame = 89 ((3800,), float64)
├── frame = 9 ((3800,), float64)
├── frame = 90 ((3800,), float64)
├── frame = 91 ((3800,), float64)
├── frame = 92 ((3800,), float64)
├── frame = 93 ((3800,), float64)
├── frame = 94 ((3800,), float64)
├── frame = 95 ((3800,), float64)
├── frame = 96 ((3800,), float64)
├── frame = 97 ((3800,), float64)
├── frame = 98 ((3800,), float64)
└── frame = 99 ((3800,), float64)
2. 全データの可視化¶
1次元データを線画で表示します。
In [161]:
def plot_profile():
# profileの間隔
aset = 0.2
# x軸の範囲
theta_range = (16,19)
# figのサイズ
size_inches = (3,4.5)
# 画像表示モード # "plot" or "imshow"
mode = "imshow"
mode = "plot"
if True: # Main
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# inputの保存
json_input = data["inputs"]
with open(json_input, mode = "r") as f:
inputs = json.load(f)
key = sys._getframe().f_code.co_name
inputs[key] = dict()
inputs["aset"] = aset
inputs["theta_range"] = theta_range
inputs["size_inches"] = size_inches
inputs["mode"] = mode
with open(json_input, mode = "w") as f:
json.dump(inputs, f, indent = 4)
# flag処理
if True:
try:
theta_range # type: ignore
except:
theta_range = None
switch_theta_range = False
else:
switch_theta_range = True
if not mode in ["plot", "imshow"]:
raise KeyError("mode must be 'plot' or 'imshow'")
# figureを作成
fig, ax = plt.subplots()
fig.set_size_inches(size_inches)
fig.set_dpi(300)
fig.subplots_adjust(
left = 0.08,
right = 0.92,
bottom = 0.08,
top = 0.92
)
# xデータを読み込み(共通)
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
theta = np.array(f["2theta"][()]) # type: ignore
# mask処理
mask = np.ones(theta.shape).astype(np.bool_)
if switch_theta_range:
mask[theta<theta_range[0]] = False # type: ignore
mask[theta>theta_range[1]] = False # type: ignore
# axを構成
ax.set_xlabel("2theta [degree]", fontsize = 10)
ax.autoscale(tight = True)
if switch_theta_range:
ax.set_xlim(*theta_range) # type: ignore
else:
ax.set_xlim(theta[0], theta[-1])
ax.set_yticks([])
if mode == "plot":
# すべてプロットする
n_frame = len(data["set_filelist"]["flist"])
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
for i in range(n_frame):
intensity = np.array(f["intensity"]["frame = {}".format(i)][()]) # type: ignore
ax.plot(
theta[mask],
(intensity + i*aset)[mask],
lw = 0.1,
c = "0"
)
simple_progress_bar(i+1, n_frame)
elif mode == "imshow":
# すべてをstack
n_frame = len(data["set_filelist"]["flist"])
d = []
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
for i in range(n_frame):
intensity = np.array(f["intensity"]["frame = {}".format(i)][()]) # type: ignore
d.append(intensity[mask])
simple_progress_bar(i+1, n_frame)
d = np.vstack(d)
ax.imshow(
d,
cmap = "gray",
extent = (theta[mask][0], theta[mask][-1], 0, n_frame),
origin = "lower",
aspect = "auto",
vmin = d.min(),
vmax = d.max(),
)
# 画像の表示
fig.canvas.draw()
img = im.frombuffer(
mode = "RGBA",
size = fig.canvas.get_width_height(),
data = fig.canvas.buffer_rgba(), # type: ignore
decoder_name = "raw"
)
key = sys._getframe().f_code.co_name
imgfilename = cachedir + "/{}.png".format(key)
img.save(imgfilename)
logger.debug("[Save fig]: " + os.path.abspath(imgfilename))
pdffilename = cachedir + "/{}.pdf".format(key)
plt.savefig(pdffilename)
logger.debug("[Save fig]: " + os.path.abspath(pdffilename))
plt.close()
display(Image(filename = imgfilename, width = size_inches[0]*100))
return
plot_profile()
del plot_profile
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■] 100% (217/217)
DEBUG 2025-08-05 10:57:29,635 [2046979543.py:123] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\plot_profile.png DEBUG 2025-08-05 10:57:30,174 [2046979543.py:126] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\plot_profile.pdf
3. フィッティング¶
In [140]:
from modules.peakfit import peakfit, pseudoVoigt
import matplotlib.animation as anim
フィッティング範囲を絞ります。
In [164]:
def set_fitlim():
# profileの間隔
aset = 1
# x軸の範囲
theta_range = (16,19)
# fittingする範囲
fit_range = (17.5,18.5)
# グラフのサイズ
size_inches = (3,5)
if True: # Main
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# figureを作成
fig, ax = plt.subplots()
fig.set_size_inches(size_inches)
fig.set_dpi(300)
fig.subplots_adjust(
left = 0.08,
right = 0.92,
bottom = 0.08,
top = 0.92
)
# xデータを読み込み(共通)
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
theta = np.array(f["2theta"][()]) # type: ignore
# mask処理
mask = np.ones(theta.shape).astype(np.bool_)
mask[theta < theta_range[0]] = False
mask[theta > theta_range[1]] = False
fitmask = np.ones(theta.shape).astype(np.bool_)
fitmask[theta < fit_range[0]] = False
fitmask[theta > fit_range[1]] = False
# すべてプロットする
n_frame = len(data["set_filelist"]["flist"])
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
for i in range(n_frame):
intensity = np.array(f["intensity"]["frame = {}".format(i)][()]) # type: ignore
ax.plot(
theta[mask],
(intensity + i*aset)[mask],
lw = 0.1,
c = "0"
)
ax.plot(
theta[fitmask],
(intensity + i*aset)[fitmask],
lw = 0.1,
c = "tab:orange"
)
simple_progress_bar(i+1, n_frame)
# axを構成
ax.set_xlabel("2theta [degree]", fontsize = 10)
ax.autoscale(tight = True)
ax.set_xlim(*theta_range) # type: ignore
ylim = ax.get_ylim()
# 画像の表示
fig.canvas.draw()
img = im.frombuffer(
mode = "RGBA",
size = fig.canvas.get_width_height(),
data = fig.canvas.buffer_rgba(), # type: ignore
decoder_name = "raw"
)
key = sys._getframe().f_code.co_name
imgfilename = cachedir + "/{}.png".format(key)
img.save(imgfilename)
logger.debug("[Save fig]: " + os.path.abspath(imgfilename))
pdffilename = cachedir + "/{}.pdf".format(key)
plt.savefig(pdffilename)
logger.debug("[Save fig]: " + os.path.abspath(pdffilename))
plt.close()
display(Image(filename = imgfilename, width = size_inches[0]*100))
# データ格納
data[key] = dict()
data[key]["theta_range"] = theta_range
data[key]["aset"] = aset
data[key]["size_inches"] = size_inches
data[key]["fit_range"] = fit_range
data[key]["ylim"] = ylim
logger.info("[Add variables]: {}".format(key))
dict_tree(data)
return
set_fitlim()
del set_fitlim
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■] 100% (217/217)
DEBUG 2025-08-05 10:58:38,082 [2936025466.py:80] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\set_fitlim.png DEBUG 2025-08-05 10:58:38,668 [2936025466.py:83] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\set_fitlim.pdf
INFO 2025-08-05 10:58:38,706 [2936025466.py:94] [Add variables]: set_fitlim
├── inputs <class 'str'>
├── set_filelist
│ ├── dir <class 'str'>
│ ├── flist <class 'list'>
│ ├── header <class 'str'>
│ └── footer <class 'str'>
├── convertCSV2HDF
│ └── hdf <class 'str'>
├── set_fitlim
│ ├── theta_range <class 'tuple'>
│ ├── aset <class 'int'>
│ ├── size_inches <class 'tuple'>
│ ├── fit_range <class 'tuple'>
│ └── ylim <class 'tuple'>
├── autofit
│ └── res <class 'list'>
└── export_html
└── ipynb <class 'str'>
In [165]:
def autofit():
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# 角度データを読み込み
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
theta = np.array(f["2theta"][()]) # type: ignore
# 強度データを取得
n_frame = len(data["set_filelist"]["flist"])
intensities = list()
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
for i in range(n_frame):
intensities.append(np.array(f["intensity"]["frame = {}".format(i)][()])) # type: ignore
intensities = np.vstack(intensities)
# mask処理
mask = np.ones(theta.shape).astype(np.bool_)
mask[theta < data["set_fitlim"]["fit_range"][0]] = False
mask[theta > data["set_fitlim"]["fit_range"][1]] = False
theta_fit = theta[mask]
intensities_fit = intensities.T[mask].T
# 出力格納用変数を定義
res = [None]*n_frame
# フィット用関数を定義
pf = peakfit()
def process(i):
_res = pf.fit_Vigot_func(theta = theta_fit, intensity = intensities_fit[i])
res_dict = dict()
for j, k in enumerate(["popt", "pcov"]):
res_dict[k] = dict()
for l, m in enumerate(pf.variables(nop = 1)):
res_dict[k][m] = _res[j][l]
res_dict["r2"] = _res[2]
return i, res_dict
# マルチスレッドを使いながら演算
with confu.ThreadPoolExecutor() as tpe:
futures = [tpe.submit(process, i_frame) for i_frame in np.arange(n_frame)]
for i, future in enumerate(confu.as_completed(futures)):
i_frame, val = future.result()
res[i_frame] = val # type: ignore
simple_progress_bar(i+1, n_frame)
# データ格納
key = sys._getframe().f_code.co_name
data[key] = dict()
data[key]["res"] = res
logger.debug("[Add variable]: res")
logger.info("size of data: {} MB".format(sys.getsizeof(data)%1024%1024))
# データ保存
jsonfile = os.path.join(cachedir, key+".json")
with open(jsonfile, mode = "w") as f:
json.dump(res, f, indent = 4)
logger.info("[Save json]: " + os.path.abspath(jsonfile))
dict_tree(data)
return
autofit()
del autofit
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■-] 98% (213/217)
DEBUG 2025-08-05 10:58:47,358 [942604827.py:53] [Add variable]: res INFO 2025-08-05 10:58:47,361 [942604827.py:54] size of data: 272 MB INFO 2025-08-05 10:58:47,379 [942604827.py:60] [Save json]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\autofit.json
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■] 100% (217/217)
├── inputs <class 'str'>
├── set_filelist
│ ├── dir <class 'str'>
│ ├── flist <class 'list'>
│ ├── header <class 'str'>
│ └── footer <class 'str'>
├── convertCSV2HDF
│ └── hdf <class 'str'>
├── set_fitlim
│ ├── theta_range <class 'tuple'>
│ ├── aset <class 'int'>
│ ├── size_inches <class 'tuple'>
│ ├── fit_range <class 'tuple'>
│ └── ylim <class 'tuple'>
├── autofit
│ └── res <class 'list'>
└── export_html
└── ipynb <class 'str'>
フィッティング結果を出力します。
In [166]:
def plot_res():
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# 変数を読み込み
theta_range = data["set_fitlim"]["theta_range"]
fit_range = data["set_fitlim"]["fit_range"]
aset = data["set_fitlim"]["aset"]
size_inches = data["set_fitlim"]["size_inches"]
ylim = data["set_fitlim"]["ylim"]
# figureを作成
fig, ax = plt.subplots()
fig.set_size_inches(size_inches)
fig.set_dpi(300)
fig.subplots_adjust(
left = 0.08,
right = 0.92,
bottom = 0.08,
top = 0.92
)
# xデータを読み込み(共通)
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
theta = np.array(f["2theta"][()]) # type: ignore
# mask処理
mask = np.ones(theta.shape).astype(np.bool_)
mask[theta < theta_range[0]] = False
mask[theta > theta_range[1]] = False
fitmask = np.ones(theta.shape).astype(np.bool_)
fitmask[theta < fit_range[0]] = False
fitmask[theta > fit_range[1]] = False
# すべてプロットする
n_frame = len(data["set_filelist"]["flist"])
pf = peakfit()
with h5py.File(data["convertCSV2HDF"]["hdf"], mode = "r") as f:
for i in range(n_frame):
intensity = np.array(f["intensity"]["frame = {}".format(i)][()]) # type: ignore
ax.plot(
theta[mask],
(intensity + i*aset)[mask],
lw = 0.1,
c = "0"
)
x = np.linspace(theta[fitmask][0], theta[fitmask][-1], 200)
y: np.ndarray = pseudoVoigt(
x,
*[data["autofit"]["res"][i]["popt"][j] for j in pf.variables()]
) # type: ignore
ax.plot(
x,
y + i*aset,
lw = 0.1,
c = "tab:orange"
)
ax.plot(
[data["autofit"]["res"][i]["popt"]["mu"]],
[
data["autofit"]["res"][i]["popt"]["amp"]
+ data["autofit"]["res"][i]["popt"]["b0"]
+ data["autofit"]["res"][i]["popt"]["b1"] * data["autofit"]["res"][i]["popt"]["mu"]
+ i*aset
],
lw = 0,
marker = "o",
ms = 1,
mec = "tab:orange",
c = "1",
mew = 0.1
)
simple_progress_bar(i+1, n_frame)
# axを構成
ax.set_xlabel("2theta [degree]", fontsize = 10)
ax.autoscale(tight = True)
ax.set_xlim(*theta_range) # type: ignore
ax.set_ylim(ylim)
# 画像の表示
fig.canvas.draw()
img = im.frombuffer(
mode = "RGBA",
size = fig.canvas.get_width_height(),
data = fig.canvas.buffer_rgba(), # type: ignore
decoder_name = "raw"
)
key = sys._getframe().f_code.co_name
imgfilename = cachedir + "/{}.png".format(key)
img.save(imgfilename)
logger.debug("[Save fig]: " + os.path.abspath(imgfilename))
pdffilename = cachedir + "/{}.pdf".format(key)
plt.savefig(pdffilename)
logger.debug("[Save fig]: " + os.path.abspath(pdffilename))
plt.close()
display(Image(filename = imgfilename, width = size_inches[0]*100))
return
plot_res()
del plot_res
Progress: [■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■■] 100% (217/217)
DEBUG 2025-08-05 10:58:50,793 [2237340504.py:95] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\plot_res.png DEBUG 2025-08-05 10:58:51,550 [2237340504.py:98] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\plot_res.pdf
フィッティング結果の可視化をします。
In [167]:
def show_fitting_result():
"""変更履歴
* 2025/08/04: 作成
"""
# jsonファイルを読み込む
key = "autofit"
jsonfile = cachedir + "/{}.json".format(key)
with open(jsonfile, mode = "r") as f:
j = json.load(f)
logger.info("[Load json]: " + os.path.abspath(jsonfile))
# データフレームを作成
x = np.arange(len(j))
mu = [j[i]["popt"]["mu"] for i in range(len(j))]
err_mu = np.sqrt(np.array([j[i]["pcov"]["mu"] for i in range(len(j))]))
fwhm_g = np.array([j[i]["popt"]["fwhm_g"] for i in range(len(j))])
fwhm_l = np.array([j[i]["popt"]["fwhm_l"] for i in range(len(j))])
eta = np.array([j[i]["popt"]["eta"] for i in range(len(j))])
r2 = np.array([j[i]["r2"] for i in range(len(j))])
# figureを作成
fig = plt.figure()
size_inches = (8,12)
fig.set_size_inches(size_inches)
fig.set_dpi(300)
# mu
ax_mu = fig.add_axes(
rect = (
0.1,
0.7375,
0.8,
0.2
)
)
ax_mu.errorbar(
x,
mu,
yerr = err_mu,
lw = 0,
fmt = "o",
ms = 6,
elinewidth = 1,
ecolor = "0",
mfc = "1",
mec = "0"
)
ax_mu.set_ylabel("Peak position [deg]", fontsize = 10)
ylim_mu = ax_mu.get_ylim()
range_mu = ylim_mu[1]-ylim_mu[0]
ax_mu.set_xticklabels([])
# fwhm
ax_fwhm = fig.add_axes(
rect = (
0.1,
0.5125,
0.8,
0.2
)
)
ax_fwhm.errorbar(
x,
fwhm_g,
# yerr = err_fwhm_g,
lw = 0,
fmt = "^",
ms = 6,
elinewidth = 1,
ecolor = "tab:blue",
mfc = "1",
mec = "tab:blue",
label = "gaussian"
)
ax_fwhm.errorbar(
x,
fwhm_l,
# yerr = err_fwhm_l,
lw = 0,
fmt = "v",
ms = 6,
elinewidth = 1,
ecolor = "tab:orange",
mfc = "1",
mec = "tab:orange",
label = "lorentzian"
)
ax_fwhm.set_ylim((0, range_mu*20))
ax_fwhm.legend()
ax_fwhm.set_ylabel("Full width at half maximum [deg]", fontsize = 10)
ax_fwhm.set_xticklabels([])
# eta
ax_eta = fig.add_axes(
rect = (
0.1,
0.2875,
0.8,
0.2
)
)
ax_eta.errorbar(
x,
eta,
# yerr = err,
lw = 0,
fmt = "o",
ms = 6,
elinewidth = 1,
ecolor = "0",
mfc = "1",
mec = "0"
)
ax_eta.set_ylabel("Gaussian rate", fontsize = 10)
ax_eta.set_ylim(-0,1)
ax_eta.set_xticklabels([])
# fitting
ax_r2 = fig.add_axes(
rect = (
0.1,
0.0625,
0.8,
0.2
)
)
ax_r2.errorbar(
x,
r2,
# yerr = err,
lw = 0,
fmt = "o",
ms = 6,
elinewidth = 1,
ecolor = "0",
mfc = "1",
mec = "0"
)
ax_r2.set_ylabel("Determination coefficient", fontsize = 10)
ax_r2.set_ylim(0.8,1)
ax_r2.set_xlabel("Frame", fontsize = 10)
# 画像の表示
fig.canvas.draw()
img = im.frombuffer(
mode = "RGBA",
size = fig.canvas.get_width_height(),
data = fig.canvas.buffer_rgba(), # type: ignore
decoder_name = "raw"
)
key = sys._getframe().f_code.co_name
imgfilename = cachedir + "/{}.png".format(key)
img.save(imgfilename)
logger.debug("[Save fig]: " + os.path.abspath(imgfilename))
# pdffilename = cachedir + "/{}/{}.pdf".format(peakname, key)
# plt.savefig(pdffilename)
# logger.debug("[Save fig]: " + os.path.abspath(pdffilename))
plt.close()
display(Image(filename = imgfilename, width = size_inches[0]*100))
return
show_fitting_result()
del show_fitting_result
INFO 2025-08-05 10:58:55,640 [2296972354.py:12] [Load json]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\autofit.json DEBUG 2025-08-05 10:58:56,227 [2296972354.py:158] [Save fig]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\show_fitting_result.png
csvで保存します
In [176]:
def save_csv():
"""変更履歴
* 2025/08/05: セル出力を増やしました
* 2025/08/01: 変更履歴作成
"""
# jsonファイルを読み込む
key = "autofit"
jsonfile = cachedir + "/{}.json".format(key)
with open(jsonfile, mode = "r") as f:
j = json.load(f)
logger.info("[Load json]: " + os.path.abspath(jsonfile))
# データフレームを作成
mu = [j[i]["popt"]["mu"] for i in range(len(j))]
err_mu = np.sqrt(np.array([j[i]["pcov"]["mu"] for i in range(len(j))]))
fwhm_g = [j[i]["popt"]["fwhm_g"] for i in range(len(j))]
fwhm_l = [j[i]["popt"]["fwhm_l"] for i in range(len(j))]
eta = [j[i]["popt"]["eta"] for i in range(len(j))]
r2 = [j[i]["r2"] for i in range(len(j))]
df = pd.DataFrame(dict(
twotheta = mu,
err_twotheta = err_mu,
fwhm_g = fwhm_g,
fwhm_l = fwhm_l,
eta = eta,
r2 = r2,
))
logger.info("pd.DataFrame made")
# データ保存
key = sys._getframe().f_code.co_name
csvfile = os.path.join(cachedir, key + ".csv")
df.to_csv(csvfile)
logger.info("[Save csv]: " + os.path.abspath(csvfile))
# 出力
display(df.head())
df.info()
return
save_csv()
del save_csv
INFO 2025-08-05 11:05:09,864 [605604950.py:13] [Load json]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\autofit.json INFO 2025-08-05 11:05:09,868 [605604950.py:30] pd.DataFrame made INFO 2025-08-05 11:05:09,876 [605604950.py:36] [Save csv]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\.cache\save_csv.csv
| twotheta | err_twotheta | fwhm_g | fwhm_l | eta | r2 | |
|---|---|---|---|---|---|---|
| 0 | 17.885150 | 0.000364 | 0.126706 | 0.124332 | 0.452317 | 0.997467 |
| 1 | 17.885373 | 0.000325 | 0.130994 | 0.123095 | 0.517128 | 0.998024 |
| 2 | 17.884389 | 0.000321 | 0.123067 | 0.131105 | 0.380999 | 0.998083 |
| 3 | 17.884496 | 0.000416 | 0.134081 | 0.125422 | 0.417149 | 0.996886 |
| 4 | 17.885583 | 0.000372 | 0.131608 | 0.128037 | 0.517359 | 0.997501 |
<class 'pandas.core.frame.DataFrame'> RangeIndex: 217 entries, 0 to 216 Data columns (total 6 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 twotheta 217 non-null float64 1 err_twotheta 217 non-null float64 2 fwhm_g 217 non-null float64 3 fwhm_l 217 non-null float64 4 eta 217 non-null float64 5 r2 217 non-null float64 dtypes: float64(6) memory usage: 10.3 KB
4. エクスポート¶
In [177]:
import subprocess
In [ ]:
def export_html():
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# ipynbファイルを検知
dirname = os.path.dirname(cachedir)
for i in os.listdir(dirname):
if os.path.splitext(i)[1] == ".ipynb":
notebook_name = os.path.join(dirname, i)
break
logger.info("[Find notebook]: " + os.path.abspath(notebook_name))
# htmlに変換
subprocess.run(["jupyter", "nbconvert", "--to", "html", notebook_name])
logger.info("[Subprocess]: Export as html file")
# データ格納
key = sys._getframe().f_code.co_name
data[key] = dict()
data[key]["ipynb"] = notebook_name
logger.info("size of data: {} MB".format(sys.getsizeof(data)%1024%1024))
dict_tree(data)
return
export_html()
del export_html
INFO 2025-08-05 11:05:17,307 [1158074776.py:13] [Find notebook]: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\fit_xrd_auto.ipynb
データを保存します
In [156]:
def save_data():
"""変更履歴
* 2025/08/01: 変更履歴作成
"""
# tkinterでファイルダイアログを開く
window = Tk()
window.wm_attributes("-topmost", 1)
window.withdraw()
initialfilename = os.path.basename(os.path.splitext(data["export_html"]["ipynb"])[0]) + ".dir"
filename = filedialog.asksaveasfilename(
parent = window,
filetypes = [
("DIRECTORY", "*.dir"),
("csv", "*.csv"),
("json", "*.json"),
("html", "*.html"),
],
initialfile=initialfilename,
defaultextension=".dir"
)
print(filename)
# # 分岐
if os.path.splitext(filename)[1] == ".csv":
original = os.path.join(cachedir, "save_csv.csv")
logger.debug("Original file being copied...: " + os.path.abspath(original))
shutil.copy2(
src = original,
dst = filename
)
logger.info("File copied: " + os.path.abspath(filename))
elif os.path.splitext(filename)[1] == ".json":
original = os.path.join(cachedir, "autofit.json")
logger.debug("Original file being copied...: " + os.path.abspath(original))
shutil.copy2(
src = original,
dst = filename
)
logger.info("File copied: " + os.path.abspath(filename))
elif os.path.splitext(filename)[1] == ".html":
original = os.path.splitext(data["export_html"]["ipynb"])[0] + ".html"
logger.debug("Original file being copied...: " + os.path.abspath(original))
shutil.copy2(
src = original,
dst = filename
)
logger.info("File copied: " + os.path.abspath(filename))
elif os.path.splitext(filename)[1] == ".dir":
dirname = os.path.splitext(filename)[0]
logger.debug("Original directory being copied...: " + os.path.abspath(os.getcwd()))
if os.path.exists(dirname):
shutil.rmtree(dirname)
shutil.copytree(
src = os.getcwd(),
dst = dirname,
dirs_exist_ok = True
)
for f in os.listdir(dirname):
if os.path.splitext(f)[1] == ".ipynb":
os.remove(os.path.join(dirname, f))
logger.info("Directory copied: " + os.path.abspath(dirname))
return
save_data()
del save_data
DEBUG 2025-08-05 10:51:30,423 [318837294.py:47] Original file being copied...: c:\Users\okaza\pythonenv\fpd\fit_xrd_auto\fit_xrd_auto.html INFO 2025-08-05 10:51:30,442 [318837294.py:52] File copied: C:\Users\okaza\Box\DataStrorage\SPring8-2025-06\Okazaki\UODE15\analyze\xrd_fitting\Pyrite111.html
C:/Users/okaza/Box/DataStrorage/SPring8-2025-06/Okazaki/UODE15/analyze/xrd_fitting/Pyrite111.html